Data Cleaning - Geospatial Data
# Load every quarterly-financials CSV and combine them into one dataframe.
# specify the directory containing the CSV files
directory = '../../data/quarterly_financials'
# read one dataframe per CSV file; sorted() makes the read order
# deterministic (os.listdir order is filesystem-dependent)
dfs = [
    pd.read_csv(os.path.join(directory, filename))
    for filename in sorted(os.listdir(directory))
    if filename.endswith('.csv')
]
# pd.concat raises an opaque error on an empty list — fail with a clear message
if not dfs:
    raise FileNotFoundError(f'no CSV files found in {directory}')
# concatenate the dataframes into a single dataframe
quarterly_financials = pd.concat(dfs, ignore_index=True)
# newest report date first; ties broken by FDIC certificate number
quarterly_financials = quarterly_financials.sort_values(
    by=['REPDTE', 'CERT'], ascending=[False, True])
quarterly_financials = quarterly_financials.reset_index(drop=True)
# print the combined dataframe
display(quarterly_financials.shape)
display(quarterly_financials.head())(1014549, 16)
| ZIP | BKCLASS | EQR | REPDTE | DEPDOM | ASSET | STNAME | EQ | NAME | CITY | ADDRESS | ENDEFYMD | CERT | ESTYMD | LIAB | ID | |
|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|
| 0 | 2111 | SM | 8.918529 | 20221231 | 163284000.0 | 298020000 | MASSACHUSETTS | 26579000.0 | STATE STREET BANK&TRUST CO | BOSTON | 1 LINCOLN ST | 99991231.0 | 14 | 17920101 | 271441000.0 | 14_20221231 |
| 1 | 36830 | SM | 6.446081 | 20221231 | 952037.0 | 1023366 | ALABAMA | 65967.0 | AUBURNBANK | AUBURN | 100 N GAY ST | 99991231.0 | 35 | 19070103 | 957399.0 | 35_20221231 |
| 2 | 36732 | NM | 7.323604 | 20221231 | 407949.0 | 444822 | ALABAMA | 32577.0 | ROBERTSON BANKING CO | DEMOPOLIS | 216 N WALNUT AVE | 99991231.0 | 39 | 18700101 | 412245.0 | 39_20221231 |
| 3 | 36867 | NM | -3.191064 | 20221231 | 266874.0 | 265272 | ALABAMA | -8465.0 | PHENIX-GIRARD BANK | PHENIX CITY | 801 13TH ST | 99991231.0 | 41 | 19040504 | 273737.0 | 41_20221231 |
| 4 | 36401 | NM | 7.197104 | 20221231 | 70649.0 | 76239 | ALABAMA | 5487.0 | BANK OF EVERGREEN | EVERGREEN | 146 W FRONT ST | 99991231.0 | 49 | 19320901 | 70752.0 | 49_20221231 |
# how many distinct FDIC bank-class codes exist in the data, and which ones
display(len(quarterly_financials['BKCLASS'].unique()))
display(quarterly_financials['BKCLASS'].unique())7
array(['SM', 'NM', 'N', 'SI', 'SB', 'SL', 'OI'], dtype=object)
Zip coordinates
# load ZIP-code centroid coordinates from the Census gazetteer file
zip_coordinates = pd.read_csv(
    '../../data/coordinate_data/2022_Gaz_zcta_national.txt', sep='\t')
# the gazetteer pads header names with whitespace — strip it off
zip_coordinates.columns = zip_coordinates.columns.str.strip()
# keep only the GEOID / latitude / longitude columns
zip_coordinates = zip_coordinates.drop(
    columns=['ALAND', 'ALAND_SQMI', 'AWATER', 'AWATER_SQMI'])
# check
display(zip_coordinates.shape)
display(zip_coordinates.head())(33791, 3)
| GEOID | INTPTLAT | INTPTLONG | |
|---|---|---|---|
| 0 | 601 | 18.180555 | -66.749961 |
| 1 | 602 | 18.361945 | -67.175597 |
| 2 | 603 | 18.457399 | -67.124867 |
| 3 | 606 | 18.158327 | -66.932928 |
| 4 | 610 | 18.293960 | -67.127182 |
Combine
# Align the join keys: the gazetteer calls its ZIP column 'GEOID'
zip_coordinates = zip_coordinates.rename(columns={'GEOID': 'ZIP'})
# Left-join so every financial record keeps its row even without a match
merged_df = quarterly_financials.merge(zip_coordinates, on='ZIP', how='left')
# Pair latitude and longitude into a single (lat, lon) tuple column
merged_df['zip_coordinates'] = [
    (lat, lon)
    for lat, lon in zip(merged_df['INTPTLAT'], merged_df['INTPTLONG'])
]
# Adopt the merged result as the working dataframe
quarterly_financials = merged_df
# Print the updated 'quarterly_financials' dataframe
display(quarterly_financials.shape)
display(quarterly_financials.head())(1014549, 19)
| ZIP | BKCLASS | EQR | REPDTE | DEPDOM | ASSET | STNAME | EQ | NAME | CITY | ADDRESS | ENDEFYMD | CERT | ESTYMD | LIAB | ID | INTPTLAT | INTPTLONG | zip_coordinates | |
|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|
| 0 | 2111 | SM | 8.918529 | 20221231 | 163284000.0 | 298020000 | MASSACHUSETTS | 26579000.0 | STATE STREET BANK&TRUST CO | BOSTON | 1 LINCOLN ST | 99991231.0 | 14 | 17920101 | 271441000.0 | 14_20221231 | 42.350680 | -71.060527 | (42.35068, -71.060527) |
| 1 | 36830 | SM | 6.446081 | 20221231 | 952037.0 | 1023366 | ALABAMA | 65967.0 | AUBURNBANK | AUBURN | 100 N GAY ST | 99991231.0 | 35 | 19070103 | 957399.0 | 35_20221231 | 32.534872 | -85.493755 | (32.534872, -85.493755) |
| 2 | 36732 | NM | 7.323604 | 20221231 | 407949.0 | 444822 | ALABAMA | 32577.0 | ROBERTSON BANKING CO | DEMOPOLIS | 216 N WALNUT AVE | 99991231.0 | 39 | 18700101 | 412245.0 | 39_20221231 | 32.417456 | -87.892213 | (32.417456, -87.892213) |
| 3 | 36867 | NM | -3.191064 | 20221231 | 266874.0 | 265272 | ALABAMA | -8465.0 | PHENIX-GIRARD BANK | PHENIX CITY | 801 13TH ST | 99991231.0 | 41 | 19040504 | 273737.0 | 41_20221231 | 32.498054 | -85.023590 | (32.498054, -85.02359) |
| 4 | 36401 | NM | 7.197104 | 20221231 | 70649.0 | 76239 | ALABAMA | 5487.0 | BANK OF EVERGREEN | EVERGREEN | 146 W FRONT ST | 99991231.0 | 49 | 19320901 | 70752.0 | 49_20221231 | 31.468970 | -86.950426 | (31.46897, -86.950426) |
# drop rows whose ZIP had no coordinate match in the gazetteer
quarterly_financials = quarterly_financials.dropna(subset=['INTPTLAT'])
# drop columns that are no longer needed downstream
unused_columns = ['INTPTLAT', 'INTPTLONG', 'ENDEFYMD', 'CERT',
                  'ESTYMD', 'LIAB', 'ID', 'ADDRESS', 'EQ', 'ZIP']
quarterly_financials = quarterly_financials.drop(columns=unused_columns)
# source reports values in thousands; convert to millions
for money_column in ('DEPDOM', 'ASSET'):
    quarterly_financials[money_column] = quarterly_financials[money_column] / 1000
# human-readable column names
quarterly_financials = quarterly_financials.rename(columns={
    'BKCLASS': 'bank_class',
    'REPDTE': 'report_date',
    'DEPDOM': 'deposits_mill',
    'ASSET': 'assets_mill',
    'STNAME': 'state',
    'NAME': 'name',
    'CITY': 'city',
})
display(quarterly_financials.head())| bank_class | EQR | report_date | deposits_mill | assets_mill | state | name | city | zip_coordinates | |
|---|---|---|---|---|---|---|---|---|---|
| 0 | SM | 8.918529 | 20221231 | 163284.000 | 298020.000 | MASSACHUSETTS | STATE STREET BANK&TRUST CO | BOSTON | (42.35068, -71.060527) |
| 1 | SM | 6.446081 | 20221231 | 952.037 | 1023.366 | ALABAMA | AUBURNBANK | AUBURN | (32.534872, -85.493755) |
| 2 | NM | 7.323604 | 20221231 | 407.949 | 444.822 | ALABAMA | ROBERTSON BANKING CO | DEMOPOLIS | (32.417456, -87.892213) |
| 3 | NM | -3.191064 | 20221231 | 266.874 | 265.272 | ALABAMA | PHENIX-GIRARD BANK | PHENIX CITY | (32.498054, -85.02359) |
| 4 | NM | 7.197104 | 20221231 | 70.649 | 76.239 | ALABAMA | BANK OF EVERGREEN | EVERGREEN | (31.46897, -86.950426) |
# change to title case
quarterly_financials[['state', 'name', 'city']] = quarterly_financials[[
    'state', 'name', 'city']].apply(lambda x: x.str.title())
# parse the YYYYMMDD integers into proper datetimes
quarterly_financials['report_date'] = pd.to_datetime(
    quarterly_financials['report_date'], format='%Y%m%d')
# NOTE(review): the original re-cast deposits_mill and assets_mill to float
# here, but both columns are already float64 after the earlier divide-by-1000,
# so those casts were redundant no-ops and have been removed.
display(quarterly_financials.head())| bank_class | EQR | report_date | deposits_mill | assets_mill | state | name | city | zip_coordinates | |
|---|---|---|---|---|---|---|---|---|---|
| 0 | SM | 8.918529 | 2022-12-31 | 163284.000 | 298020.000 | Massachusetts | State Street Bank&Trust Co | Boston | (42.35068, -71.060527) |
| 1 | SM | 6.446081 | 2022-12-31 | 952.037 | 1023.366 | Alabama | Auburnbank | Auburn | (32.534872, -85.493755) |
| 2 | NM | 7.323604 | 2022-12-31 | 407.949 | 444.822 | Alabama | Robertson Banking Co | Demopolis | (32.417456, -87.892213) |
| 3 | NM | -3.191064 | 2022-12-31 | 266.874 | 265.272 | Alabama | Phenix-Girard Bank | Phenix City | (32.498054, -85.02359) |
| 4 | NM | 7.197104 | 2022-12-31 | 70.649 | 76.239 | Alabama | Bank Of Evergreen | Evergreen | (31.46897, -86.950426) |
# expand FDIC bank-class codes into descriptive labels
# (replace leaves codes without an entry unchanged)
bank_class_labels = {
    'N': 'Commercial bank, national charter, Fed member',
    'NM': 'Commercial bank, state charter, Fed non-member',
    'OI': 'Insured U.S. branch of a foreign chartered institution',
    'SB': 'Federal savings banks',
    'SI': 'State chartered stock savings banks',
    'SL': 'State chartered stock savings and loan association',
    'SM': 'Commercial bank, state charter, Fed member',
    'NC': 'Noninsured non-deposit commercial bank',
    'NS': 'Noninsured stock savings bank',
    'CU': 'State or federally chartered credit union',
}
quarterly_financials['bank_class'] = quarterly_financials['bank_class'].replace(
    bank_class_labels)
display(quarterly_financials.shape)
display(quarterly_financials.head())(979599, 9)
| bank_class | EQR | report_date | deposits_mill | assets_mill | state | name | city | zip_coordinates | |
|---|---|---|---|---|---|---|---|---|---|
| 0 | Commercial bank, state charter, Fed member | 8.918529 | 2022-12-31 | 163284.000 | 298020.000 | Massachusetts | State Street Bank&Trust Co | Boston | (42.35068, -71.060527) |
| 1 | Commercial bank, state charter, Fed member | 6.446081 | 2022-12-31 | 952.037 | 1023.366 | Alabama | Auburnbank | Auburn | (32.534872, -85.493755) |
| 2 | Commercial bank, state charter, Fed non-member | 7.323604 | 2022-12-31 | 407.949 | 444.822 | Alabama | Robertson Banking Co | Demopolis | (32.417456, -87.892213) |
| 3 | Commercial bank, state charter, Fed non-member | -3.191064 | 2022-12-31 | 266.874 | 265.272 | Alabama | Phenix-Girard Bank | Phenix City | (32.498054, -85.02359) |
| 4 | Commercial bank, state charter, Fed non-member | 7.197104 | 2022-12-31 | 70.649 | 76.239 | Alabama | Bank Of Evergreen | Evergreen | (31.46897, -86.950426) |
Growth of Assets Per State
# convert financials to a geodataframe; zip_coordinates tuples are (lat, lon)
# while points_from_xy expects x=longitude, y=latitude, hence the swap
lats = quarterly_financials['zip_coordinates'].apply(lambda pair: pair[0])
lons = quarterly_financials['zip_coordinates'].apply(lambda pair: pair[1])
gdf = gpd.GeoDataFrame(quarterly_financials,
                       geometry=gpd.points_from_xy(lons, lats))
# define a function to get the aggregate assets
def plot_bank_assets_by_date(date):
    """Render a folium map of total bank assets per state for one report date."""
    # restrict to the requested report date, then total assets by state
    snapshot = gdf[gdf['report_date'] == date]
    state_assets = snapshot.groupby('state')['assets_mill'].sum().reset_index()
    # base map centred on the continental US
    fmap = folium.Map(location=[37.8, -96], zoom_start=3)

    def bubble_radius(assets):
        # shrink raw asset totals to a sensible marker radius
        return assets / 50000

    # one bubble per state, placed at the centroid of its bank locations
    for row in state_assets.itertuples(index=False):
        state_points = snapshot[snapshot['state'] == row.state]
        centroid = state_points.unary_union.centroid
        folium.CircleMarker(
            location=[centroid.y, centroid.x],
            radius=bubble_radius(row.assets_mill),
            color='#118C4F',
            fill=True,
            fill_color='#118C4F',
            fill_opacity=0.5,
            popup=f"State: {row.state}<br>Total Assets: {row.assets_mill:.0f}<br>Date: {date}"
        ).add_to(fmap)
    # show
    return fmap
# report dates to show in the grid (one snapshot per decade)
unique_dates = ['1992-12-31', '2002-12-31', '2012-12-31', '2022-12-31']
# build one folium map per date
maps = [plot_bank_assets_by_date(pd.to_datetime(d)) for d in unique_dates]
# for rendering in HTML
def folium_map_to_iframe_data(m, map_name):
    """Save folium map *m* under geospatial_data_html/ and wrap it in an IFrame.

    Returns an IPython IFrame pointing at the saved HTML file.
    """
    # m.save does not create intermediate directories — ensure it exists first
    os.makedirs('geospatial_data_html', exist_ok=True)
    map_path = f'geospatial_data_html/{map_name}.html'
    m.save(map_path)
    iframe = IFrame(src=map_path, width=350, height=300)
    return iframe
# CSS and opening container for a two-column flex grid of maps
# (each .map-box is 50% wide, so maps wrap two per row)
html_structure = """
<style>
.map-container {
display: flex;
flex-wrap: wrap;
}
.map-box {
width: 50%;
padding: 5px;
box-sizing: border-box;
}
.map-title {
text-align: center;
font-weight: bold;
}
</style>
<div class="map-container">
"""
# append one titled iframe box per map to the grid
for idx, fmap in enumerate(maps):
    frame = folium_map_to_iframe_data(fmap, f'map_{idx}')
    html_structure += (
        f'<div class="map-box"><div class="map-title">{unique_dates[idx]}</div>'
        f'{frame._repr_html_()}</div>'
    )
# close the container div
html_structure += '</div>'
# show
display(HTML(html_structure))
# alternative for checking in jupyter notebook
# NOTE(review): the guard was `if True == False:` — a confusing spelling of a
# permanently-disabled branch; replaced with the literal `False`.
if False:  # flip to True only if the inline base64 preview is required
    def folium_map_to_iframe_data(m):
        # embed the map directly as a base64 data URI (no file written to disk)
        data = BytesIO()
        m.save(data, close_file=False)
        data.seek(0)
        b64 = base64.b64encode(data.read()).decode("utf-8")
        return f'<iframe src="data:text/html;base64,{b64}" width="350" height="300"></iframe>'
    # lay the maps out two per row in a plain HTML table
    table_html = '<table><tr>'
    for i, m in enumerate(maps):
        iframe_data = folium_map_to_iframe_data(m)
        table_html += f'<td><h3>{unique_dates[i]}</h3>{iframe_data}</td>'
        if (i + 1) % 2 == 0:
            table_html += '</tr><tr>'
    table_html += '</tr></table>'
display(HTML(table_html))1992-12-31
2002-12-31
2012-12-31
2022-12-31
Individual Points
Heatmap all dates
Latest date only
# get the latest report date in the data
latest_date = quarterly_financials['report_date'].max()
# restrict to that single quarter
latest_quarterly_financials = quarterly_financials[quarterly_financials['report_date'] == latest_date]
# create base folium map centred on the continental US
# NOTE(review): renamed from `map`, which shadowed the builtin
bank_map = folium.Map(location=[37.8, -96], zoom_start=4)
# heatmap points as (lat, lon, weight) with assets as the weight
heatmap_data = []
for index, row in latest_quarterly_financials.iterrows():
    coordinates = row['zip_coordinates']
    assets_mill = row['assets_mill']
    heatmap_data.append((*coordinates, assets_mill))
# add heatmap layer to folium
heatmap = HeatMap(heatmap_data, radius=13, max_zoom=13, gradient={
    0.2: 'blue', 0.4: 'lime', 0.6: 'orange', 1: 'red'})
bank_map.add_child(heatmap)

# scale raw asset totals down to a usable marker radius
def scale_bubble_size(assets):
    return assets / 50000

# add one circle marker per bank with a tooltip
for index, row in latest_quarterly_financials.iterrows():
    coordinates = row['zip_coordinates']
    assets_mill = row['assets_mill']
    bank_name = row['name']
    tooltip_text = f"Bank: {bank_name}<br>Coordinates: {coordinates}<br>Assets (millions): {assets_mill}"
    folium.CircleMarker(
        location=coordinates,
        radius=scale_bubble_size(assets_mill),
        color='#118C4F',
        fill=True,
        fill_color='#118C4F',
        fill_opacity=0.5,
        tooltip=tooltip_text,
    ).add_to(bank_map)
# for rendering on Quarto
bank_map.save('geospatial_data_html/heatmap.html')
IFrame(src='geospatial_data_html/heatmap.html', width=700, height=600)
# for rendering in Jupyter Notebook
display(bank_map)